Load libraries.
import os
import cv2
import glob
import numpy as np
from keras.models import *
from keras.layers import *
from keras.applications import *
from keras.preprocessing.image import *
Using TensorFlow backend.
basedir = "/ext/Data/distracted_driver_detection/"
model_image_size = 299  # Xception's default ImageNet input resolution

# Load the training set: 10 class folders c0..c9, one-hot labels.
print("-------- loading train data")
X_train = list()
y_train = list()
for i in range(10):
    # renamed from `dir` to avoid shadowing the builtin
    class_dir = os.path.join(basedir, "train", "c%d" % i)
    image_files = glob.glob(os.path.join(class_dir, "*.jpg"))
    # BUG FIX: log message typo "loding" -> "loading"
    print("loading {}, image count={}".format(class_dir, len(image_files)))
    for image_file in image_files:
        image = cv2.imread(image_file)
        if image is None:
            # ROBUSTNESS: cv2.imread returns None for unreadable/corrupt
            # files instead of raising; skip them rather than crash in resize.
            print("skipping unreadable file {}".format(image_file))
            continue
        X_train.append(cv2.resize(image, (model_image_size, model_image_size)))
        label = np.zeros(10, dtype=np.uint8)  # one-hot row for class i
        label[i] = 1
        y_train.append(label)
X_train = np.array(X_train)  # (N, 299, 299, 3) uint8 BGR images
y_train = np.array(y_train)  # (N, 10) one-hot labels
# Load the validation set: same layout as train (10 folders, one-hot labels).
print("-------- loading valid data")
X_valid = list()
y_valid = list()
for i in range(10):
    # renamed from `dir` to avoid shadowing the builtin
    class_dir = os.path.join(basedir, "valid", "c%d" % i)
    image_files = glob.glob(os.path.join(class_dir, "*.jpg"))
    # BUG FIX: log message typo "loding" -> "loading"
    print("loading {}, image count={}".format(class_dir, len(image_files)))
    for image_file in image_files:
        image = cv2.imread(image_file)
        if image is None:
            # ROBUSTNESS: cv2.imread returns None for unreadable/corrupt
            # files instead of raising; skip them rather than crash in resize.
            print("skipping unreadable file {}".format(image_file))
            continue
        X_valid.append(cv2.resize(image, (model_image_size, model_image_size)))
        label = np.zeros(10, dtype=np.uint8)  # one-hot row for class i
        label[i] = 1
        y_valid.append(label)
X_valid = np.array(X_valid)  # (N, 299, 299, 3) uint8 BGR images
y_valid = np.array(y_valid)  # (N, 10) one-hot labels
-------- loading train data loding /ext/Data/distracted_driver_detection/train/c0, image count=2308 loding /ext/Data/distracted_driver_detection/train/c1, image count=2096 loding /ext/Data/distracted_driver_detection/train/c2, image count=2136 loding /ext/Data/distracted_driver_detection/train/c3, image count=2185 loding /ext/Data/distracted_driver_detection/train/c4, image count=2160 loding /ext/Data/distracted_driver_detection/train/c5, image count=2152 loding /ext/Data/distracted_driver_detection/train/c6, image count=2164 loding /ext/Data/distracted_driver_detection/train/c7, image count=1843 loding /ext/Data/distracted_driver_detection/train/c8, image count=1771 loding /ext/Data/distracted_driver_detection/train/c9, image count=1972 -------- loading valid data loding /ext/Data/distracted_driver_detection/valid/c0, image count=181 loding /ext/Data/distracted_driver_detection/valid/c1, image count=171 loding /ext/Data/distracted_driver_detection/valid/c2, image count=181 loding /ext/Data/distracted_driver_detection/valid/c3, image count=161 loding /ext/Data/distracted_driver_detection/valid/c4, image count=166 loding /ext/Data/distracted_driver_detection/valid/c5, image count=160 loding /ext/Data/distracted_driver_detection/valid/c6, image count=161 loding /ext/Data/distracted_driver_detection/valid/c7, image count=159 loding /ext/Data/distracted_driver_detection/valid/c8, image count=140 loding /ext/Data/distracted_driver_detection/valid/c9, image count=157
# Sanity check: report the shape of every dataset array.
for dataset_array in (X_train, y_train, X_valid, y_valid):
    print(dataset_array.shape)
(20787, 299, 299, 3) (20787, 10) (1637, 299, 299, 3) (1637, 10)
# Transfer learning: frozen ImageNet Xception backbone + a small trainable head
# (global average pool -> dropout -> 10-way softmax).
base_model = Xception(input_tensor=Input((model_image_size, model_image_size, 3)),
                      weights='imagenet', include_top=False)
for layer in base_model.layers:
    layer.trainable = False  # freeze the backbone; only the new head trains
x = GlobalAveragePooling2D()(base_model.output)
x = Dropout(0.25)(x)
x = Dense(10, activation='softmax')(x)
model = Model(base_model.input, x)
# BUG FIX: the original compiled with loss='binary_crossentropy', which treats
# each of the 10 softmax outputs as an independent binary problem and makes the
# 'accuracy' metric report per-output binary accuracy (misleadingly high).
# For mutually-exclusive one-hot labels the correct loss is
# categorical_crossentropy.
model.compile(optimizer='adadelta', loss='categorical_crossentropy', metrics=['accuracy'])
print("done")
done
model.summary()
____________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
====================================================================================================
input_1 (InputLayer) (None, 299, 299, 3) 0
____________________________________________________________________________________________________
block1_conv1 (Conv2D) (None, 149, 149, 32) 864 input_1[0][0]
____________________________________________________________________________________________________
block1_conv1_bn (BatchNormalizat (None, 149, 149, 32) 128 block1_conv1[0][0]
____________________________________________________________________________________________________
block1_conv1_act (Activation) (None, 149, 149, 32) 0 block1_conv1_bn[0][0]
____________________________________________________________________________________________________
block1_conv2 (Conv2D) (None, 147, 147, 64) 18432 block1_conv1_act[0][0]
____________________________________________________________________________________________________
block1_conv2_bn (BatchNormalizat (None, 147, 147, 64) 256 block1_conv2[0][0]
____________________________________________________________________________________________________
block1_conv2_act (Activation) (None, 147, 147, 64) 0 block1_conv2_bn[0][0]
____________________________________________________________________________________________________
block2_sepconv1 (SeparableConv2D (None, 147, 147, 128) 8768 block1_conv2_act[0][0]
____________________________________________________________________________________________________
block2_sepconv1_bn (BatchNormali (None, 147, 147, 128) 512 block2_sepconv1[0][0]
____________________________________________________________________________________________________
block2_sepconv2_act (Activation) (None, 147, 147, 128) 0 block2_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block2_sepconv2 (SeparableConv2D (None, 147, 147, 128) 17536 block2_sepconv2_act[0][0]
____________________________________________________________________________________________________
block2_sepconv2_bn (BatchNormali (None, 147, 147, 128) 512 block2_sepconv2[0][0]
____________________________________________________________________________________________________
conv2d_1 (Conv2D) (None, 74, 74, 128) 8192 block1_conv2_act[0][0]
____________________________________________________________________________________________________
block2_pool (MaxPooling2D) (None, 74, 74, 128) 0 block2_sepconv2_bn[0][0]
____________________________________________________________________________________________________
batch_normalization_1 (BatchNorm (None, 74, 74, 128) 512 conv2d_1[0][0]
____________________________________________________________________________________________________
add_1 (Add) (None, 74, 74, 128) 0 block2_pool[0][0]
batch_normalization_1[0][0]
____________________________________________________________________________________________________
block3_sepconv1_act (Activation) (None, 74, 74, 128) 0 add_1[0][0]
____________________________________________________________________________________________________
block3_sepconv1 (SeparableConv2D (None, 74, 74, 256) 33920 block3_sepconv1_act[0][0]
____________________________________________________________________________________________________
block3_sepconv1_bn (BatchNormali (None, 74, 74, 256) 1024 block3_sepconv1[0][0]
____________________________________________________________________________________________________
block3_sepconv2_act (Activation) (None, 74, 74, 256) 0 block3_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block3_sepconv2 (SeparableConv2D (None, 74, 74, 256) 67840 block3_sepconv2_act[0][0]
____________________________________________________________________________________________________
block3_sepconv2_bn (BatchNormali (None, 74, 74, 256) 1024 block3_sepconv2[0][0]
____________________________________________________________________________________________________
conv2d_2 (Conv2D) (None, 37, 37, 256) 32768 add_1[0][0]
____________________________________________________________________________________________________
block3_pool (MaxPooling2D) (None, 37, 37, 256) 0 block3_sepconv2_bn[0][0]
____________________________________________________________________________________________________
batch_normalization_2 (BatchNorm (None, 37, 37, 256) 1024 conv2d_2[0][0]
____________________________________________________________________________________________________
add_2 (Add) (None, 37, 37, 256) 0 block3_pool[0][0]
batch_normalization_2[0][0]
____________________________________________________________________________________________________
block4_sepconv1_act (Activation) (None, 37, 37, 256) 0 add_2[0][0]
____________________________________________________________________________________________________
block4_sepconv1 (SeparableConv2D (None, 37, 37, 728) 188672 block4_sepconv1_act[0][0]
____________________________________________________________________________________________________
block4_sepconv1_bn (BatchNormali (None, 37, 37, 728) 2912 block4_sepconv1[0][0]
____________________________________________________________________________________________________
block4_sepconv2_act (Activation) (None, 37, 37, 728) 0 block4_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block4_sepconv2 (SeparableConv2D (None, 37, 37, 728) 536536 block4_sepconv2_act[0][0]
____________________________________________________________________________________________________
block4_sepconv2_bn (BatchNormali (None, 37, 37, 728) 2912 block4_sepconv2[0][0]
____________________________________________________________________________________________________
conv2d_3 (Conv2D) (None, 19, 19, 728) 186368 add_2[0][0]
____________________________________________________________________________________________________
block4_pool (MaxPooling2D) (None, 19, 19, 728) 0 block4_sepconv2_bn[0][0]
____________________________________________________________________________________________________
batch_normalization_3 (BatchNorm (None, 19, 19, 728) 2912 conv2d_3[0][0]
____________________________________________________________________________________________________
add_3 (Add) (None, 19, 19, 728) 0 block4_pool[0][0]
batch_normalization_3[0][0]
____________________________________________________________________________________________________
block5_sepconv1_act (Activation) (None, 19, 19, 728) 0 add_3[0][0]
____________________________________________________________________________________________________
block5_sepconv1 (SeparableConv2D (None, 19, 19, 728) 536536 block5_sepconv1_act[0][0]
____________________________________________________________________________________________________
block5_sepconv1_bn (BatchNormali (None, 19, 19, 728) 2912 block5_sepconv1[0][0]
____________________________________________________________________________________________________
block5_sepconv2_act (Activation) (None, 19, 19, 728) 0 block5_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block5_sepconv2 (SeparableConv2D (None, 19, 19, 728) 536536 block5_sepconv2_act[0][0]
____________________________________________________________________________________________________
block5_sepconv2_bn (BatchNormali (None, 19, 19, 728) 2912 block5_sepconv2[0][0]
____________________________________________________________________________________________________
block5_sepconv3_act (Activation) (None, 19, 19, 728) 0 block5_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block5_sepconv3 (SeparableConv2D (None, 19, 19, 728) 536536 block5_sepconv3_act[0][0]
____________________________________________________________________________________________________
block5_sepconv3_bn (BatchNormali (None, 19, 19, 728) 2912 block5_sepconv3[0][0]
____________________________________________________________________________________________________
add_4 (Add) (None, 19, 19, 728) 0 block5_sepconv3_bn[0][0]
add_3[0][0]
____________________________________________________________________________________________________
block6_sepconv1_act (Activation) (None, 19, 19, 728) 0 add_4[0][0]
____________________________________________________________________________________________________
block6_sepconv1 (SeparableConv2D (None, 19, 19, 728) 536536 block6_sepconv1_act[0][0]
____________________________________________________________________________________________________
block6_sepconv1_bn (BatchNormali (None, 19, 19, 728) 2912 block6_sepconv1[0][0]
____________________________________________________________________________________________________
block6_sepconv2_act (Activation) (None, 19, 19, 728) 0 block6_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block6_sepconv2 (SeparableConv2D (None, 19, 19, 728) 536536 block6_sepconv2_act[0][0]
____________________________________________________________________________________________________
block6_sepconv2_bn (BatchNormali (None, 19, 19, 728) 2912 block6_sepconv2[0][0]
____________________________________________________________________________________________________
block6_sepconv3_act (Activation) (None, 19, 19, 728) 0 block6_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block6_sepconv3 (SeparableConv2D (None, 19, 19, 728) 536536 block6_sepconv3_act[0][0]
____________________________________________________________________________________________________
block6_sepconv3_bn (BatchNormali (None, 19, 19, 728) 2912 block6_sepconv3[0][0]
____________________________________________________________________________________________________
add_5 (Add) (None, 19, 19, 728) 0 block6_sepconv3_bn[0][0]
add_4[0][0]
____________________________________________________________________________________________________
block7_sepconv1_act (Activation) (None, 19, 19, 728) 0 add_5[0][0]
____________________________________________________________________________________________________
block7_sepconv1 (SeparableConv2D (None, 19, 19, 728) 536536 block7_sepconv1_act[0][0]
____________________________________________________________________________________________________
block7_sepconv1_bn (BatchNormali (None, 19, 19, 728) 2912 block7_sepconv1[0][0]
____________________________________________________________________________________________________
block7_sepconv2_act (Activation) (None, 19, 19, 728) 0 block7_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block7_sepconv2 (SeparableConv2D (None, 19, 19, 728) 536536 block7_sepconv2_act[0][0]
____________________________________________________________________________________________________
block7_sepconv2_bn (BatchNormali (None, 19, 19, 728) 2912 block7_sepconv2[0][0]
____________________________________________________________________________________________________
block7_sepconv3_act (Activation) (None, 19, 19, 728) 0 block7_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block7_sepconv3 (SeparableConv2D (None, 19, 19, 728) 536536 block7_sepconv3_act[0][0]
____________________________________________________________________________________________________
block7_sepconv3_bn (BatchNormali (None, 19, 19, 728) 2912 block7_sepconv3[0][0]
____________________________________________________________________________________________________
add_6 (Add) (None, 19, 19, 728) 0 block7_sepconv3_bn[0][0]
add_5[0][0]
____________________________________________________________________________________________________
block8_sepconv1_act (Activation) (None, 19, 19, 728) 0 add_6[0][0]
____________________________________________________________________________________________________
block8_sepconv1 (SeparableConv2D (None, 19, 19, 728) 536536 block8_sepconv1_act[0][0]
____________________________________________________________________________________________________
block8_sepconv1_bn (BatchNormali (None, 19, 19, 728) 2912 block8_sepconv1[0][0]
____________________________________________________________________________________________________
block8_sepconv2_act (Activation) (None, 19, 19, 728) 0 block8_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block8_sepconv2 (SeparableConv2D (None, 19, 19, 728) 536536 block8_sepconv2_act[0][0]
____________________________________________________________________________________________________
block8_sepconv2_bn (BatchNormali (None, 19, 19, 728) 2912 block8_sepconv2[0][0]
____________________________________________________________________________________________________
block8_sepconv3_act (Activation) (None, 19, 19, 728) 0 block8_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block8_sepconv3 (SeparableConv2D (None, 19, 19, 728) 536536 block8_sepconv3_act[0][0]
____________________________________________________________________________________________________
block8_sepconv3_bn (BatchNormali (None, 19, 19, 728) 2912 block8_sepconv3[0][0]
____________________________________________________________________________________________________
add_7 (Add) (None, 19, 19, 728) 0 block8_sepconv3_bn[0][0]
add_6[0][0]
____________________________________________________________________________________________________
block9_sepconv1_act (Activation) (None, 19, 19, 728) 0 add_7[0][0]
____________________________________________________________________________________________________
block9_sepconv1 (SeparableConv2D (None, 19, 19, 728) 536536 block9_sepconv1_act[0][0]
____________________________________________________________________________________________________
block9_sepconv1_bn (BatchNormali (None, 19, 19, 728) 2912 block9_sepconv1[0][0]
____________________________________________________________________________________________________
block9_sepconv2_act (Activation) (None, 19, 19, 728) 0 block9_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block9_sepconv2 (SeparableConv2D (None, 19, 19, 728) 536536 block9_sepconv2_act[0][0]
____________________________________________________________________________________________________
block9_sepconv2_bn (BatchNormali (None, 19, 19, 728) 2912 block9_sepconv2[0][0]
____________________________________________________________________________________________________
block9_sepconv3_act (Activation) (None, 19, 19, 728) 0 block9_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block9_sepconv3 (SeparableConv2D (None, 19, 19, 728) 536536 block9_sepconv3_act[0][0]
____________________________________________________________________________________________________
block9_sepconv3_bn (BatchNormali (None, 19, 19, 728) 2912 block9_sepconv3[0][0]
____________________________________________________________________________________________________
add_8 (Add) (None, 19, 19, 728) 0 block9_sepconv3_bn[0][0]
add_7[0][0]
____________________________________________________________________________________________________
block10_sepconv1_act (Activation (None, 19, 19, 728) 0 add_8[0][0]
____________________________________________________________________________________________________
block10_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block10_sepconv1_act[0][0]
____________________________________________________________________________________________________
block10_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block10_sepconv1[0][0]
____________________________________________________________________________________________________
block10_sepconv2_act (Activation (None, 19, 19, 728) 0 block10_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block10_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block10_sepconv2_act[0][0]
____________________________________________________________________________________________________
block10_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block10_sepconv2[0][0]
____________________________________________________________________________________________________
block10_sepconv3_act (Activation (None, 19, 19, 728) 0 block10_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block10_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block10_sepconv3_act[0][0]
____________________________________________________________________________________________________
block10_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block10_sepconv3[0][0]
____________________________________________________________________________________________________
add_9 (Add) (None, 19, 19, 728) 0 block10_sepconv3_bn[0][0]
add_8[0][0]
____________________________________________________________________________________________________
block11_sepconv1_act (Activation (None, 19, 19, 728) 0 add_9[0][0]
____________________________________________________________________________________________________
block11_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block11_sepconv1_act[0][0]
____________________________________________________________________________________________________
block11_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block11_sepconv1[0][0]
____________________________________________________________________________________________________
block11_sepconv2_act (Activation (None, 19, 19, 728) 0 block11_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block11_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block11_sepconv2_act[0][0]
____________________________________________________________________________________________________
block11_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block11_sepconv2[0][0]
____________________________________________________________________________________________________
block11_sepconv3_act (Activation (None, 19, 19, 728) 0 block11_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block11_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block11_sepconv3_act[0][0]
____________________________________________________________________________________________________
block11_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block11_sepconv3[0][0]
____________________________________________________________________________________________________
add_10 (Add) (None, 19, 19, 728) 0 block11_sepconv3_bn[0][0]
add_9[0][0]
____________________________________________________________________________________________________
block12_sepconv1_act (Activation (None, 19, 19, 728) 0 add_10[0][0]
____________________________________________________________________________________________________
block12_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block12_sepconv1_act[0][0]
____________________________________________________________________________________________________
block12_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block12_sepconv1[0][0]
____________________________________________________________________________________________________
block12_sepconv2_act (Activation (None, 19, 19, 728) 0 block12_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block12_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block12_sepconv2_act[0][0]
____________________________________________________________________________________________________
block12_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block12_sepconv2[0][0]
____________________________________________________________________________________________________
block12_sepconv3_act (Activation (None, 19, 19, 728) 0 block12_sepconv2_bn[0][0]
____________________________________________________________________________________________________
block12_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block12_sepconv3_act[0][0]
____________________________________________________________________________________________________
block12_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block12_sepconv3[0][0]
____________________________________________________________________________________________________
add_11 (Add) (None, 19, 19, 728) 0 block12_sepconv3_bn[0][0]
add_10[0][0]
____________________________________________________________________________________________________
block13_sepconv1_act (Activation (None, 19, 19, 728) 0 add_11[0][0]
____________________________________________________________________________________________________
block13_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block13_sepconv1_act[0][0]
____________________________________________________________________________________________________
block13_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block13_sepconv1[0][0]
____________________________________________________________________________________________________
block13_sepconv2_act (Activation (None, 19, 19, 728) 0 block13_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block13_sepconv2 (SeparableConv2 (None, 19, 19, 1024) 752024 block13_sepconv2_act[0][0]
____________________________________________________________________________________________________
block13_sepconv2_bn (BatchNormal (None, 19, 19, 1024) 4096 block13_sepconv2[0][0]
____________________________________________________________________________________________________
conv2d_4 (Conv2D) (None, 10, 10, 1024) 745472 add_11[0][0]
____________________________________________________________________________________________________
block13_pool (MaxPooling2D) (None, 10, 10, 1024) 0 block13_sepconv2_bn[0][0]
____________________________________________________________________________________________________
batch_normalization_4 (BatchNorm (None, 10, 10, 1024) 4096 conv2d_4[0][0]
____________________________________________________________________________________________________
add_12 (Add) (None, 10, 10, 1024) 0 block13_pool[0][0]
batch_normalization_4[0][0]
____________________________________________________________________________________________________
block14_sepconv1 (SeparableConv2 (None, 10, 10, 1536) 1582080 add_12[0][0]
____________________________________________________________________________________________________
block14_sepconv1_bn (BatchNormal (None, 10, 10, 1536) 6144 block14_sepconv1[0][0]
____________________________________________________________________________________________________
block14_sepconv1_act (Activation (None, 10, 10, 1536) 0 block14_sepconv1_bn[0][0]
____________________________________________________________________________________________________
block14_sepconv2 (SeparableConv2 (None, 10, 10, 2048) 3159552 block14_sepconv1_act[0][0]
____________________________________________________________________________________________________
block14_sepconv2_bn (BatchNormal (None, 10, 10, 2048) 8192 block14_sepconv2[0][0]
____________________________________________________________________________________________________
block14_sepconv2_act (Activation (None, 10, 10, 2048) 0 block14_sepconv2_bn[0][0]
____________________________________________________________________________________________________
global_average_pooling2d_1 (Glob (None, 2048) 0 block14_sepconv2_act[0][0]
____________________________________________________________________________________________________
dropout_1 (Dropout) (None, 2048) 0 global_average_pooling2d_1[0][0]
____________________________________________________________________________________________________
dense_1 (Dense) (None, 10) 20490 dropout_1[0][0]
====================================================================================================
Total params: 20,881,970
Trainable params: 20,490
Non-trainable params: 20,861,480
____________________________________________________________________________________________________
# Train the classification head for 10 epochs (backbone frozen at compile time),
# validating against the held-out driver split, then persist the full model.
# NOTE(review): the file name says "resnet50" but the backbone is Xception —
# misleading; renaming it also requires updating the load_model call further
# down, so both must change together.
model.fit(X_train, y_train, batch_size=16, epochs=10, validation_data=(X_valid, y_valid))
model.save("models/resnet50-mymodel.h5")
Train on 20787 samples, validate on 1637 samples Epoch 1/10 20787/20787 [==============================] - 188s - loss: 0.2563 - acc: 0.9037 - val_loss: 0.2671 - val_acc: 0.9017 Epoch 2/10 20787/20787 [==============================] - 182s - loss: 0.1810 - acc: 0.9268 - val_loss: 0.2505 - val_acc: 0.9062 Epoch 3/10 20787/20787 [==============================] - 182s - loss: 0.1485 - acc: 0.9442 - val_loss: 0.2450 - val_acc: 0.9081 Epoch 4/10 20787/20787 [==============================] - 181s - loss: 0.1302 - acc: 0.9529 - val_loss: 0.2363 - val_acc: 0.9091 Epoch 5/10 20787/20787 [==============================] - 181s - loss: 0.1167 - acc: 0.9592 - val_loss: 0.2397 - val_acc: 0.9100 Epoch 6/10 20787/20787 [==============================] - 180s - loss: 0.1084 - acc: 0.9622 - val_loss: 0.2443 - val_acc: 0.9045 Epoch 7/10 20787/20787 [==============================] - 181s - loss: 0.1020 - acc: 0.9655 - val_loss: 0.2533 - val_acc: 0.9042 Epoch 8/10 20787/20787 [==============================] - 180s - loss: 0.0968 - acc: 0.9671 - val_loss: 0.2495 - val_acc: 0.9028 Epoch 9/10 20787/20787 [==============================] - 177s - loss: 0.0928 - acc: 0.9686 - val_loss: 0.2457 - val_acc: 0.9043 Epoch 10/10 20787/20787 [==============================] - 177s - loss: 0.0895 - acc: 0.9699 - val_loss: 0.2496 - val_acc: 0.9029
# Reload the trained model from disk and render its graph as inline SVG
# (model_to_dot requires pydot + graphviz to be installed).
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot
from keras.models import *

# NOTE(review): file name is historical — the saved model is Xception-based.
model = load_model("models/resnet50-mymodel.h5")
# BUG FIX: message typo "load successed" -> "load succeeded"
print("load succeeded")
SVG(model_to_dot(model).create(prog='dot', format='svg'))
load successed
Class Activation Mapping (CAM) reference: http://cnnlocalization.csail.mit.edu/

$cam = (P - 0.5) \cdot (output \times w)$, where $P$ is the predicted class probability, $output$ is the last convolutional feature map, and $w$ is the dense-layer weight matrix.
# Print "layer_name - index" for every layer, so the CAM cells below can pick
# layer indices by eye.
# IDIOM: the original built zip(names, range(len(layers))) by hand;
# enumerate expresses the same pairing directly.
for idx, layer in enumerate(model.layers):
    print("{} - {}".format(layer.name, idx))
input_1 - 0 block1_conv1 - 1 block1_conv1_bn - 2 block1_conv1_act - 3 block1_conv2 - 4 block1_conv2_bn - 5 block1_conv2_act - 6 block2_sepconv1 - 7 block2_sepconv1_bn - 8 block2_sepconv2_act - 9 block2_sepconv2 - 10 block2_sepconv2_bn - 11 conv2d_1 - 12 block2_pool - 13 batch_normalization_1 - 14 add_1 - 15 block3_sepconv1_act - 16 block3_sepconv1 - 17 block3_sepconv1_bn - 18 block3_sepconv2_act - 19 block3_sepconv2 - 20 block3_sepconv2_bn - 21 conv2d_2 - 22 block3_pool - 23 batch_normalization_2 - 24 add_2 - 25 block4_sepconv1_act - 26 block4_sepconv1 - 27 block4_sepconv1_bn - 28 block4_sepconv2_act - 29 block4_sepconv2 - 30 block4_sepconv2_bn - 31 conv2d_3 - 32 block4_pool - 33 batch_normalization_3 - 34 add_3 - 35 block5_sepconv1_act - 36 block5_sepconv1 - 37 block5_sepconv1_bn - 38 block5_sepconv2_act - 39 block5_sepconv2 - 40 block5_sepconv2_bn - 41 block5_sepconv3_act - 42 block5_sepconv3 - 43 block5_sepconv3_bn - 44 add_4 - 45 block6_sepconv1_act - 46 block6_sepconv1 - 47 block6_sepconv1_bn - 48 block6_sepconv2_act - 49 block6_sepconv2 - 50 block6_sepconv2_bn - 51 block6_sepconv3_act - 52 block6_sepconv3 - 53 block6_sepconv3_bn - 54 add_5 - 55 block7_sepconv1_act - 56 block7_sepconv1 - 57 block7_sepconv1_bn - 58 block7_sepconv2_act - 59 block7_sepconv2 - 60 block7_sepconv2_bn - 61 block7_sepconv3_act - 62 block7_sepconv3 - 63 block7_sepconv3_bn - 64 add_6 - 65 block8_sepconv1_act - 66 block8_sepconv1 - 67 block8_sepconv1_bn - 68 block8_sepconv2_act - 69 block8_sepconv2 - 70 block8_sepconv2_bn - 71 block8_sepconv3_act - 72 block8_sepconv3 - 73 block8_sepconv3_bn - 74 add_7 - 75 block9_sepconv1_act - 76 block9_sepconv1 - 77 block9_sepconv1_bn - 78 block9_sepconv2_act - 79 block9_sepconv2 - 80 block9_sepconv2_bn - 81 block9_sepconv3_act - 82 block9_sepconv3 - 83 block9_sepconv3_bn - 84 add_8 - 85 block10_sepconv1_act - 86 block10_sepconv1 - 87 block10_sepconv1_bn - 88 block10_sepconv2_act - 89 block10_sepconv2 - 90 block10_sepconv2_bn - 91 block10_sepconv3_act 
- 92 block10_sepconv3 - 93 block10_sepconv3_bn - 94 add_9 - 95 block11_sepconv1_act - 96 block11_sepconv1 - 97 block11_sepconv1_bn - 98 block11_sepconv2_act - 99 block11_sepconv2 - 100 block11_sepconv2_bn - 101 block11_sepconv3_act - 102 block11_sepconv3 - 103 block11_sepconv3_bn - 104 add_10 - 105 block12_sepconv1_act - 106 block12_sepconv1 - 107 block12_sepconv1_bn - 108 block12_sepconv2_act - 109 block12_sepconv2 - 110 block12_sepconv2_bn - 111 block12_sepconv3_act - 112 block12_sepconv3 - 113 block12_sepconv3_bn - 114 add_11 - 115 block13_sepconv1_act - 116 block13_sepconv1 - 117 block13_sepconv1_bn - 118 block13_sepconv2_act - 119 block13_sepconv2 - 120 block13_sepconv2_bn - 121 conv2d_4 - 122 block13_pool - 123 batch_normalization_4 - 124 add_12 - 125 block14_sepconv1 - 126 block14_sepconv1_bn - 127 block14_sepconv1_act - 128 block14_sepconv2 - 129 block14_sepconv2_bn - 130 block14_sepconv2_act - 131 global_average_pooling2d_1 - 132 dropout_1 - 133 dense_1 - 134
import matplotlib.pyplot as plt
import random
%matplotlib inline
%config InlineBackend.figure_format = 'retina'
def show_heatmap_image(model_show, weights_show):
    """Plot a 4x4 grid of test images overlaid with class-activation heatmaps.

    model_show   -- model whose predict() returns [feature_map, class_probs];
                    feature_map is per-image (H, W, C), class_probs (10,).
    weights_show -- (C, 10) dense-layer weight matrix used to project the
                    feature map onto class scores (CAM).
    """
    # Hoisted out of the loop: the label list is constant.
    status = ["safe driving", " texting - right", "phone - right", "texting - left", "phone - left",
              "operation radio", "drinking", "reaching behind", "hair and makeup", "talking"]
    test_dir = os.path.join(basedir, "test", "test")
    image_files = glob.glob(os.path.join(test_dir, "*"))
    print(len(image_files))
    plt.figure(figsize=(12, 14))
    for i in range(16):
        plt.subplot(4, 4, i + 1)
        # Sample a spread of test images; assumes >= 2000*15+114 files exist.
        img = cv2.imread(image_files[2000 * i + 113])
        # Generalized: use model_image_size instead of hard-coded 299.
        img = cv2.resize(img, (model_image_size, model_image_size))
        # BUG FIX: astype() returns a new array; the original called
        # x.astype(np.float32) and discarded the result, so the model
        # actually received uint8 data.
        x = img.astype(np.float32)
        out, predictions = model_show.predict(np.expand_dims(x, axis=0))
        predictions = predictions[0]
        out = out[0]
        max_idx = np.argmax(predictions)
        prediction = predictions[max_idx]
        plt.title('c%d |%s| %.2f%%' % (max_idx, status[max_idx], prediction * 100))
        # CAM: project the feature map onto class scores, keep the top class.
        cam = (prediction - 0.5) * np.matmul(out, weights_show)
        cam = cam[:, :, max_idx]
        # Normalize to [0, 1] ...
        cam -= cam.min()
        peak = cam.max()
        if peak > 0:
            # ROBUSTNESS: guard against a flat map (division by zero -> NaN).
            cam /= peak
        # ... then shift/rescale so the bottom 20% can be masked out below.
        cam -= 0.2
        cam /= 0.8
        cam = cv2.resize(cam, (model_image_size, model_image_size))
        heatmap = cv2.applyColorMap(np.uint8(255 * cam), cv2.COLORMAP_JET)
        heatmap[np.where(cam <= 0.2)] = 0  # suppress weak activations
        out = cv2.addWeighted(img, 0.8, heatmap, 0.4, 0)
        plt.axis('off')
        plt.imshow(out[:, :, ::-1])  # BGR -> RGB for matplotlib
print("done")
done
# CAM using the final ReLU activations (layer 131, block14_sepconv2_act)
# together with the dense layer's (2048, 10) weight matrix (layer 134).
weights = model.layers[134].get_weights()[0]
layer_output = model.layers[131].output
model2 = Model(inputs=model.input, outputs=[layer_output, model.output])
print("layer_output %s" % (layer_output,))
print("weights shape %s" % (weights.shape,))
show_heatmap_image(model2, weights)
layer_output Tensor("block14_sepconv2_act_1/Relu:0", shape=(?, 10, 10, 2048), dtype=float32)
weights shape (2048, 10)
79726
# Same CAM visualization, but tapping the batch-norm output one layer earlier
# (layer 130, block14_sepconv2_bn) for comparison.
weights = model.layers[134].get_weights()[0]
layer_output = model.layers[130].output
model2 = Model(inputs=model.input, outputs=[layer_output, model.output])
print("layer_output %s" % (layer_output,))
print("weights shape %s" % (weights.shape,))
show_heatmap_image(model2, weights)
layer_output Tensor("block14_sepconv2_bn_1/cond/Merge:0", shape=(?, 10, 10, 2048), dtype=float32)
weights shape (2048, 10)
79726
# Same CAM visualization, tapping the raw separable-conv output
# (layer 129, block14_sepconv2) before batch norm, for comparison.
weights = model.layers[134].get_weights()[0]
layer_output = model.layers[129].output
model2 = Model(inputs=model.input, outputs=[layer_output, model.output])
print("layer_output %s" % (layer_output,))
print("weights shape %s" % (weights.shape,))
show_heatmap_image(model2, weights)
layer_output Tensor("block14_sepconv2_1/separable_conv2d:0", shape=(?, 10, 10, 2048), dtype=float32)
weights shape (2048, 10)
79726
